@datapos/datapos-development 0.3.114 → 0.3.116

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
@@ -1,35 +1,35 @@
- import { exec as m } from "node:child_process";
- import { promises as n } from "node:fs";
- import { nanoid as y } from "nanoid";
- import { promisify as w } from "node:util";
- const h = ["createObject", "dropObject", "removeRecords", "upsertRecords"], b = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], g = w(m);
- async function S() {
+ import { exec as h } from "node:child_process";
+ import { promises as t } from "node:fs";
+ import { nanoid as b } from "nanoid";
+ import { promisify as $ } from "node:util";
+ const E = ["createObject", "dropObject", "removeRecords", "upsertRecords"], v = ["findObject", "getRecord", "listNodes", "previewObject", "retrieveRecords"], w = $(h);
+ async function j() {
  try {
  console.info("🚀 Building configuration...");
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8"));
- o.name != null && (e.id = o.name.replace("@datapos/", "").replace("@data-positioning/", "")), o.version != null && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Configuration built.");
+ const o = JSON.parse(await t.readFile("package.json", "utf8")), e = JSON.parse(await t.readFile("config.json", "utf8"));
+ o.name != null && (e.id = o.name.replace("@datapos/", "").replace("@data-positioning/", "")), o.version != null && (e.version = o.version), await t.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Configuration built.");
  } catch (o) {
  console.error("❌ Error building configuration.", o);
  }
  }
- async function O(o) {
+ async function C(o) {
  try {
  console.info(`🚀 Building public directory index for identifier '${o}'...`);
  const e = {};
- async function i(r, s) {
- console.info(`⚙️ Processing directory '${r}'...`);
- const d = [], a = r.substring(`public/${o}`.length);
+ async function i(s, r) {
+ console.info(`⚙️ Processing directory '${s}'...`);
+ const d = [], a = s.substring(`public/${o}`.length);
  e[a] = d;
- for (const c of s) {
- const l = `${r}/${c}`;
+ for (const c of r) {
+ const l = `${s}/${c}`;
  try {
- const f = await n.stat(l);
+ const f = await t.stat(l);
  if (f.isDirectory()) {
- const u = await n.readdir(l), p = { childCount: u.length, name: `${c}`, typeId: "folder" };
- d.push(p), await i(l, u);
+ const p = await t.readdir(l), u = { childCount: p.length, name: `${c}`, typeId: "folder" };
+ d.push(u), await i(l, p);
  } else {
- const u = { id: y(), lastModifiedAt: f.mtimeMs, name: c, size: f.size, typeId: "object" };
- d.push(u);
+ const p = { id: b(), lastModifiedAt: f.mtimeMs, name: c, size: f.size, typeId: "object" };
+ d.push(p);
  }
  } catch (f) {
  throw new Error(`Unable to get information for '${c}' in 'buildPublicDirectoryIndex'. ${String(f)}`);
@@ -40,55 +40,55 @@ async function O(o) {
  return f === 0 ? c.name.localeCompare(l.name) : f;
  });
  }
- const t = await n.readdir(`public/${o}`);
- await i(`public/${o}`, t), await n.writeFile(`./public/${o}Index.json`, JSON.stringify(e), "utf8"), console.info("✅ Public directory index built.");
+ const n = await t.readdir(`public/${o}`);
+ await i(`public/${o}`, n), await t.writeFile(`./public/${o}Index.json`, JSON.stringify(e), "utf8"), console.info("✅ Public directory index built.");
  } catch (e) {
  console.error("❌ Error building public directory index.", e);
  }
  }
- async function J() {
+ async function R() {
  try {
  console.info("🚀 Building connector configuration...");
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8");
- let t = !1, r = !1;
- const s = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, d = [...i.matchAll(s)].filter((c) => c[1] == null && c[2] !== "constructor").map((c) => {
+ const o = JSON.parse(await t.readFile("package.json", "utf8")), e = JSON.parse(await t.readFile("config.json", "utf8")), i = await t.readFile("src/index.ts", "utf8");
+ let n = !1, s = !1;
+ const r = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, d = [...i.matchAll(r)].filter((c) => c[1] == null && c[2] !== "constructor").map((c) => {
  const l = c[2];
- return t = t || h.includes(l), r = r || b.includes(l), l;
+ return n = n || E.includes(l), s = s || v.includes(l), l;
  });
  d.length > 0 ? console.info(`ℹ️ Implements ${d.length} operations.`) : console.warn("⚠️ Implements no operations.");
- const a = r && t ? "bidirectional" : r ? "source" : t ? "destination" : "unknown";
- a && console.info(`ℹ️ Supports ${a} usage.`), o.name != null && (e.id = o.name), e.operations = d, e.usageId = a, o.version != null && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Connector configuration built.");
+ const a = s && n ? "bidirectional" : s ? "source" : n ? "destination" : "unknown";
+ a && console.info(`ℹ️ Supports ${a} usage.`), o.name != null && (e.id = o.name), e.operations = d, e.usageId = a, o.version != null && (e.version = o.version), await t.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Connector configuration built.");
  } catch (o) {
  console.error("❌ Error building connector configuration.", o);
  }
  }
- async function x() {
+ async function k() {
  try {
  console.info("🚀 Building context configuration...");
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), t = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...i.matchAll(t)].filter((s) => s[1] == null && s[2] !== "constructor").map((s) => s[2]);
- o.name != null && (e.id = o.name), e.operations = r, o.version != null && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Context configuration built.");
+ const o = JSON.parse(await t.readFile("package.json", "utf8")), e = JSON.parse(await t.readFile("config.json", "utf8")), i = await t.readFile("src/index.ts", "utf8"), n = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, s = [...i.matchAll(n)].filter((r) => r[1] == null && r[2] !== "constructor").map((r) => r[2]);
+ o.name != null && (e.id = o.name), e.operations = s, o.version != null && (e.version = o.version), await t.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Context configuration built.");
  } catch (o) {
  console.error("❌ Error building context configuration.", o);
  }
  }
- async function j() {
+ async function J() {
  try {
  console.info("🚀 Building presenter configuration...");
- const o = JSON.parse(await n.readFile("package.json", "utf8")), e = JSON.parse(await n.readFile("config.json", "utf8")), i = await n.readFile("src/index.ts", "utf8"), t = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, r = [...i.matchAll(t)].filter((s) => !s[1] && s[2] !== "constructor").map((s) => s[2]);
- o.name != null && (e.id = o.name), e.operations = r, o.version != null && (e.version = o.version), await n.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Presenter configuration built.");
+ const o = JSON.parse(await t.readFile("package.json", "utf8")), e = JSON.parse(await t.readFile("config.json", "utf8")), i = await t.readFile("src/index.ts", "utf8"), n = /^\s{4}(?:async\s+)?(private\s+)?(?:public\s+|protected\s+)?([A-Za-z_]\w*)\s*\(/gm, s = [...i.matchAll(n)].filter((r) => !r[1] && r[2] !== "constructor").map((r) => r[2]);
+ o.name != null && (e.id = o.name), e.operations = s, o.version != null && (e.version = o.version), await t.writeFile("config.json", JSON.stringify(e, void 0, 4), "utf8"), console.info("✅ Presenter configuration built.");
  } catch (o) {
  console.error("❌ Error building context configuration.", o);
  }
  }
- async function C(o = "./") {
+ async function A(o = "./") {
  try {
  console.info("🚀 Bumping version...");
- const e = JSON.parse(await n.readFile(`${o}package.json`, "utf8"));
+ const e = JSON.parse(await t.readFile(`${o}package.json`, "utf8"));
  if (e.version == null)
- e.version = "0.0.001", await n.writeFile(`${o}package.json`, JSON.stringify(e, void 0, 4), "utf8"), console.warn(`⚠️ Version initialised to ${e.version}.`);
+ e.version = "0.0.001", await t.writeFile(`${o}package.json`, JSON.stringify(e, void 0, 4), "utf8"), console.warn(`⚠️ Version initialised to ${e.version}.`);
  else {
- const i = e.version, t = e.version.split(".");
- e.version = `${t[0]}.${t[1]}.${Number(t[2]) + 1}`, await n.writeFile(`${o}package.json`, JSON.stringify(e, void 0, 4), "utf8"), console.info(`✅ Version bumped from ${i} to ${e.version}.`);
+ const i = e.version, n = e.version.split(".");
+ e.version = `${n[0]}.${n[1]}.${Number(n[2]) + 1}`, await t.writeFile(`${o}package.json`, JSON.stringify(e, void 0, 4), "utf8"), console.info(`✅ Version bumped from ${i} to ${e.version}.`);
  }
  } catch (e) {
  console.error("❌ Error bumping package version.", e);
@@ -97,35 +97,66 @@ async function C(o = "./") {
  function F(o) {
  console.error(`❌ ${o} script not implemented.`);
  }
- async function R() {
+ async function D() {
  const o = "<!-- DEPENDENCY_LICENSES_START -->", e = "<!-- DEPENDENCY_LICENSES_END -->";
  try {
- const i = (await n.readFile("./licenses.md", "utf8")).trim(), t = await n.readFile("./README.md", "utf8"), r = t.indexOf(o), s = t.indexOf(e);
- (r === -1 || s === -1) && (console.error("Error: Markers not found in README.md"), process.exit(1));
- const d = t.substring(0, r + o.length) + `
+ const i = (await t.readFile("./licenses.md", "utf8")).trim(), n = await t.readFile("./README.md", "utf8"), s = n.indexOf(o), r = n.indexOf(e);
+ (s === -1 || r === -1) && (console.error("Error: Markers not found in README.md"), process.exit(1));
+ const d = n.substring(0, s + o.length) + `
  ` + i + `
- ` + t.substring(s);
- await n.writeFile("README.md", d, "utf8"), console.log("✓ README.md updated with license information");
+ ` + n.substring(r);
+ await t.writeFile("README.md", d, "utf8"), console.log("✓ README.md updated with license information");
  } catch (i) {
  console.error("Error updating README:", i), process.exit(1);
  }
  }
- async function k() {
+ async function I() {
+ const o = "<!-- OWASP_BADGE_START -->", e = "<!-- OWASP_BADGE_END -->";
  try {
- const o = JSON.parse(await n.readFile("./dependency-check-reports/dependency-check-report.json", "utf-8"));
- let e = 0;
- for (const t of o.dependencies)
- t.vulnerabilities != null && (e += t.vulnerabilities.length);
- console.log("vulnerabilityCount", e);
- const i = await n.readFile("./README.md", "utf8");
- } catch (o) {
- console.error("Error updating README:", o), process.exit(1);
+ const i = JSON.parse(await t.readFile("./dependency-check-reports/dependency-check-report.json", "utf-8")), n = {
+ critical: 0,
+ high: 0,
+ moderate: 0,
+ low: 0,
+ info: 0,
+ unknown: 0
+ };
+ for (const u of i.dependencies)
+ if (u.vulnerabilities != null)
+ for (const m of u.vulnerabilities) {
+ const g = m.severity?.toLowerCase() ?? "unknown";
+ g in n ? n[g]++ : n.unknown++;
+ }
+ const s = {
+ critical: { color: "red", label: "Critical" },
+ high: { color: "orange", label: "High" },
+ moderate: { color: "yellow", label: "Moderate" },
+ low: { color: "green", label: "Low" },
+ info: { color: "brightgreen", label: "Info" },
+ unknown: { color: "lightgrey", label: "Unknown" }
+ }, r = [];
+ for (const [u, m] of Object.entries(n)) {
+ const g = s[u], y = `https://img.shields.io/badge/OWASP%20${g.label}-${m}-${g.color}`;
+ r.push(`[![OWASP ${g.label}](${y})](./dependency-check-reports/dependency-check-report.html)`);
+ }
+ const d = Object.values(n).reduce((u, m) => u + m, 0);
+ console.info(`✅ Total vulnerabilities found: ${d}`), console.info(
+ ` Critical: ${n.critical}, High: ${n.high}, Medium: ${n.medium}, Low: ${n.low}, Info: ${n.info}, Unknown: ${n.unknown}`
+ );
+ const a = await t.readFile("./README.md", "utf8"), c = a.indexOf(o), l = a.indexOf(e);
+ (c === -1 || l === -1) && (console.error("❌ Markers not found in README.md."), process.exit(1));
+ const f = r.join(" "), p = a.substring(0, c + o.length) + `
+ ` + f + `
+ ` + a.substring(l);
+ await t.writeFile("README2.md", p, "utf8"), console.info("✅ OWASP dependency check badges inserted into README.md");
+ } catch (i) {
+ console.error("❌ Error updating README with OWASP badges:", i), process.exit(1);
  }
  }
- async function D() {
+ async function M() {
  try {
  console.info("🚀 Sending deployment notice...");
- const o = JSON.parse(await n.readFile("config.json", "utf8")), e = {
+ const o = JSON.parse(await t.readFile("config.json", "utf8")), e = {
  body: JSON.stringify(o),
  headers: { "Content-Type": "application/json" },
  method: "PUT"
@@ -136,85 +167,85 @@ async function D() {
  console.error("❌ Error sending deployment notice.", o);
  }
  }
- async function I() {
+ async function P() {
  try {
  console.info("🚀 Synchronising with GitHub....");
- const o = JSON.parse(await n.readFile("package.json", "utf8"));
- await g("git add ."), await g(`git commit -m "v${o.version}"`), await g("git push origin main:main"), console.info(`✅ Synchronised version ${o.version} with GitHub.`);
+ const o = JSON.parse(await t.readFile("package.json", "utf8"));
+ await w("git add ."), await w(`git commit -m "v${o.version}"`), await w("git push origin main:main"), console.info(`✅ Synchronised version ${o.version} with GitHub.`);
  } catch (o) {
  console.error("❌ Error synchronising with GitHub.", o);
  }
  }
- async function A(o, e) {
+ async function T(o, e) {
  try {
  console.info("🚀 Uploading directory to R2....");
- async function i(r, s, d) {
+ async function i(s, r, d) {
  for (const a of d) {
- const c = `${r}/${a}`, l = `${s}/${a}`;
- if ((await n.stat(c)).isDirectory()) {
- const u = await n.readdir(c);
- await i(c, l, u);
+ const c = `${s}/${a}`, l = `${r}/${a}`;
+ if ((await t.stat(c)).isDirectory()) {
+ const p = await t.readdir(c);
+ await i(c, l, p);
  } else {
- console.info(`⚙️ Uploading '${r}/${a}'...`);
- const u = `wrangler r2 object put "datapos-sample-data-eu/${s}/${a}" --file="${r}/${a}" --jurisdiction=eu --remote`, p = await g(u);
- if (p.stderr) throw new Error(p.stderr);
+ console.info(`⚙️ Uploading '${s}/${a}'...`);
+ const p = `wrangler r2 object put "datapos-sample-data-eu/${r}/${a}" --file="${s}/${a}" --jurisdiction=eu --remote`, u = await w(p);
+ if (u.stderr) throw new Error(u.stderr);
  }
  }
  }
- const t = await n.readdir(`${o}/${e}/`);
- await i(`${o}/${e}`, e, t), console.info("✅ Directory uploaded to R2.");
+ const n = await t.readdir(`${o}/${e}/`);
+ await i(`${o}/${e}`, e, n), console.info("✅ Directory uploaded to R2.");
  } catch (i) {
  console.error("❌ Error uploading directory to R2.", i);
  }
  }
- async function M() {
+ async function _() {
  try {
  console.info("🚀 Uploading module configuration....");
- const o = JSON.parse(await n.readFile("config.json", "utf8")), e = o.id, i = {
+ const o = JSON.parse(await t.readFile("config.json", "utf8")), e = o.id, i = {
  body: JSON.stringify(o),
  headers: { "Content-Type": "application/json" },
  method: "PUT"
- }, t = await fetch(`https://api.datapos.app/states/${e}`, i);
- if (!t.ok) throw new Error(await t.text());
+ }, n = await fetch(`https://api.datapos.app/states/${e}`, i);
+ if (!n.ok) throw new Error(await n.text());
  console.info("✅ Module configuration uploaded.");
  } catch (o) {
  console.error("❌ Error uploading module configuration.", o);
  }
  }
- async function P(o) {
+ async function U(o) {
  try {
  console.info("🚀 Uploading module to R2...");
- const i = `v${JSON.parse(await n.readFile("package.json", "utf8")).version}`;
- async function t(r, s = "") {
- const d = await n.readdir(r, { withFileTypes: !0 });
+ const i = `v${JSON.parse(await t.readFile("package.json", "utf8")).version}`;
+ async function n(s, r = "") {
+ const d = await t.readdir(s, { withFileTypes: !0 });
  for (const a of d) {
- const c = `${r}/${a.name}`, l = s ? `${s}/${a.name}` : a.name;
+ const c = `${s}/${a.name}`, l = r ? `${r}/${a.name}` : a.name;
  if (!a.isDirectory()) {
- const f = `${o}_${i}/${l}`.replace(/\\/g, "/"), u = a.name.endsWith(".js") ? "application/javascript" : a.name.endsWith(".css") ? "text/css" : "application/octet-stream";
+ const f = `${o}_${i}/${l}`.replace(/\\/g, "/"), p = a.name.endsWith(".js") ? "application/javascript" : a.name.endsWith(".css") ? "text/css" : "application/octet-stream";
  console.info(`⚙️ Uploading '${l}' → '${f}'...`);
- const { stderr: p } = await g(`wrangler r2 object put "${f}" --file="${c}" --content-type ${u} --jurisdiction=eu --remote`);
- if (p) throw new Error(p);
+ const { stderr: u } = await w(`wrangler r2 object put "${f}" --file="${c}" --content-type ${p} --jurisdiction=eu --remote`);
+ if (u) throw new Error(u);
  }
  }
  }
- await t("dist"), console.info("✅ Module uploaded to R2.");
+ await n("dist"), console.info("✅ Module uploaded to R2.");
  } catch (e) {
  console.error("❌ Error uploading module to R2.", e);
  }
  }
  export {
- S as buildConfig,
- J as buildConnectorConfig,
- x as buildContextConfig,
- j as buildPresenterConfig,
- O as buildPublicDirectoryIndex,
- C as bumpVersion,
+ j as buildConfig,
+ R as buildConnectorConfig,
+ k as buildContextConfig,
+ J as buildPresenterConfig,
+ C as buildPublicDirectoryIndex,
+ A as bumpVersion,
  F as echoScriptNotImplemented,
- R as insertLicensesIntoReadme,
- k as insertOWASPDependencyCheckBadgeIntoReadme,
- D as sendDeploymentNotice,
- I as syncWithGitHub,
- A as uploadDirectoryToR2,
- M as uploadModuleConfigToDO,
- P as uploadModuleToR2
+ D as insertLicensesIntoReadme,
+ I as insertOWASPDependencyCheckBadgeIntoReadme,
+ M as sendDeploymentNotice,
+ P as syncWithGitHub,
+ T as uploadDirectoryToR2,
+ _ as uploadModuleConfigToDO,
+ U as uploadModuleToR2
  };
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@datapos/datapos-development",
- "version": "0.3.114",
+ "version": "0.3.116",
  "description": "A library of utilities for managing the Data Positioning repositories.",
  "license": "MIT",
  "author": "Jonathan Terrell <terrell.jm@gmail.com>",